import pymaid
# Connect to the public Virtual Fly Brain mirror of the FAFB CATMAID server.
# api_token=None — the server evidently permits unauthenticated read access
# (all subsequent queries below succeed without a token).
# NOTE(review): the commented-out string looks like a real API token; never
# commit real tokens, even inside a comment.
# Side effect: this registers a global CATMAID instance used implicitly by
# every pymaid.* call that follows (see the INFO log line below).
source = pymaid.CatmaidInstance('https://fafb.catmaid.virtualflybrain.org/' , api_token=None) #'9944b09199c62bcf9418ad846dd0e4bbdfc6ee4b'
INFO : Global CATMAID instance set. Caching is ON. (pymaid)
# Fetch every neuron carrying this paper annotation; the result (displayed
# below) is a table with columns id / name / type / skeleton_ids, 145 rows.
neurons = pymaid.get_annotated("Paper: Zheng et al 2018") # Paper of interest
INFO : Searching for: Paper: Zheng et al 2018 (pymaid)
# Bare expression at the end of the cell -> notebook renders the table.
neurons
| id | name | type | skeleton_ids | |
|---|---|---|---|---|
| 0 | 17 | Uniglomerular mALT VA6 adPN 017 DB | neuron | [16] |
| 1 | 7287 | KCa'B'ap 7287 EAM | neuron | [7286] |
| 2 | 7647 | KCa'B'm 7647 LK | neuron | [7646] |
| 3 | 9579 | KCy 9579 EAM | neuron | [9578] |
| 4 | 11743 | KCa'B'm 11743 BAK | neuron | [11742] |
| ... | ... | ... | ... | ... |
| 140 | 1775707 | Uniglomerular mALT DL1 adPN 22153 JMR | neuron | [1775706] |
| 141 | 1785035 | Uniglomerular mALT DA2 lPN 57399 LK | neuron | [1785034] |
| 142 | 3104791 | KCy 16943 BH | neuron | [3104789] |
| 143 | 3445526 | KCy 6487 ML-NS | neuron | [3445525] |
| 144 | 14003784 | Uniglomerular mALT VP5 + SEZ adPN 57472 LK ECM | neuron | [14003783] |
145 rows × 4 columns
# Extract the skeleton ID from each row of the "skeleton_ids" column.
# Each entry is a one-element list (e.g. [16] -> 16, per the table above).
# Replaces the original three-step version (copy the column, rebind, then
# an index loop over range(len(...))) with a single comprehension — same
# resulting list, one pass, no throwaway `old` variable.
neuron_ids = [skids[0] for skids in neurons["skeleton_ids"]]
# Build the skeleton-by-skeleton adjacency matrix and convert the returned
# DataFrame straight to a plain NumPy array in one chained expression
# (the intermediate DataFrame is never needed on its own).
connectome = pymaid.adjacency_matrix(neuron_ids).to_numpy()
# Quick visual check of the adjacency matrix via graspologic's heatmap.
# NOTE(review): third-party import placed mid-notebook — conventional for a
# notebook, but move it to the first cell if this is ever made a script.
from graspologic.plot import heatmap
heatmap(connectome)
<AxesSubplot:>
`pymaid.adjacency_from_connectors` is meant for building an adjacency matrix from data with repeated skeleton IDs. Since our IDs do not repeat, this step was skipped.
# Edge list among the selected neurons: (source, target, weight) rows.
# Its row count (831, below) should match the number of nonzero entries
# in `connectome` — verified two cells down.
pymaid.get_edges(neuron_ids)
| source | target | weight | |
|---|---|---|---|
| 0 | 203840 | 16 | 13 |
| 1 | 203840 | 7286 | 4 |
| 2 | 203840 | 7646 | 11 |
| 3 | 203840 | 9578 | 20 |
| 4 | 203840 | 11742 | 6 |
| ... | ... | ... | ... |
| 826 | 57385 | 57418 | 1 |
| 827 | 57385 | 60445 | 1 |
| 828 | 57385 | 1785034 | 1 |
| 829 | 46105 | 57503 | 2 |
| 830 | 46105 | 14003783 | 1 |
831 rows × 3 columns
# Sanity check: count the nonzero adjacency entries; should equal the
# number of edge rows returned above (831 == 831).
nonzero_entries = connectome[connectome != 0]
len(nonzero_entries)
831
# NOTE(review): this call fails with "413 Request Entity Too Large" (see the
# traceback below) — the POST listing all 145 query skeletons plus every
# partner exceeds the server's request-size limit. Querying in smaller
# batches would likely avoid the 413, but verify how the per-query columns
# of the partner table should be recombined before attempting that.
pymaid.get_partners(neuron_ids)
INFO : Fetching connectivity table for 145 neurons (pymaid)
--------------------------------------------------------------------------- HTTPError Traceback (most recent call last) <ipython-input-8-a21d9a28a1fc> in <module> ----> 1 pymaid.get_partners(neuron_ids) ~/anaconda3/envs/NDD/lib/python3.7/site-packages/pymaid/cache.py in wrapper(*args, **kwargs) 314 try: 315 # Execute function --> 316 res = function(*args, **kwargs) 317 except BaseException: 318 # If error was raised, remove new entries from cache ~/anaconda3/envs/NDD/lib/python3.7/site-packages/pymaid/fetch.py in get_partners(x, threshold, min_size, filt, min_confidence, directions, remote_instance) 757 # Get neurons' names 758 names = get_names([n for d in connectivity_data for n in connectivity_data[ --> 759 d]] + list(x), remote_instance=remote_instance) 760 761 df = pd.DataFrame(columns=['neuron_name', 'skeleton_id', ~/anaconda3/envs/NDD/lib/python3.7/site-packages/pymaid/cache.py in wrapper(*args, **kwargs) 314 try: 315 # Execute function --> 316 res = function(*args, **kwargs) 317 except BaseException: 318 # If error was raised, remove new entries from cache ~/anaconda3/envs/NDD/lib/python3.7/site-packages/pymaid/fetch.py in get_names(x, remote_instance) 851 get_names_postdata[key] = x[i] 852 --> 853 names = remote_instance.fetch(remote_get_names_url, post=get_names_postdata) 854 855 logger.debug('Names for {} of {} skeleton IDs retrieved'.format(len(names), ~/anaconda3/envs/NDD/lib/python3.7/site-packages/pymaid/client.py in fetch(self, url, post, files, on_error, desc, disable_pbar, leave_pbar, return_type) 472 if on_error == 'raise': 473 raise HTTPError('{} errors encountered: {}'.format(len(errors), --> 474 '\n'.join(errors))) 475 else: 476 for e, d in zip(errors, details): HTTPError: 1 errors encountered: 413 Server Error: Request Entity Too Large for url: https://fafb.catmaid.virtualflybrain.org/1/skeleton/neuronnames
# DO NOT DELETE THIS: IT EXPORTS FILE TO HTML
# IPython shell escape (`!`) running jupyter's nbconvert CLI; assumes the
# notebook file is named pymaid_import.ipynb and sits in the working dir.
!jupyter nbconvert --to html pymaid_import.ipynb
[NbConvertApp] Converting notebook pymaid_import.ipynb to html [NbConvertApp] Writing 583988 bytes to pymaid_import.html